Use Keras with Tensorflow backend¶

In [2]:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical

# Fix the NumPy RNG so the synthetic dataset is reproducible.
np.random.seed(1)

# --- Synthetic two-class dataset ---------------------------------------
# Class 0: 100 points drawn from N(2, 1) in 2-D, labelled 0.
x0 = np.random.normal(2, 1, (100, 2))
y0 = np.zeros(100)

# Class 1: 100 points drawn from N(-2, 1) in 2-D, labelled 1.
x1 = np.random.normal(-2, 1, (100, 2))
y1 = np.ones(100)

# Combine the classes: features -> shape (200, 2), labels -> shape (200,).
x = np.concatenate([x0, x1], axis=0)
y = np.concatenate([y0, y1])

# --- Model: 2 inputs -> 10 (relu) -> 2 (softmax) -----------------------
model = Sequential()
model.add(Dense(10, input_dim=2, activation='relu'))
model.add(Dense(2, activation='softmax'))

# Integer class labels + softmax output -> sparse categorical cross-entropy.
model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])

# Train for 10 epochs (Keras default batch_size=32).
model.fit(x, y, epochs=10)
Epoch 1/10
200/200 [==============================] - 0s 240us/step - loss: 0.4816 - accuracy: 0.6300
Epoch 2/10
200/200 [==============================] - 0s 35us/step - loss: 0.3671 - accuracy: 0.9850
Epoch 3/10
200/200 [==============================] - 0s 35us/step - loss: 0.2913 - accuracy: 0.9950
Epoch 4/10
200/200 [==============================] - 0s 30us/step - loss: 0.2426 - accuracy: 0.9950
Epoch 5/10
200/200 [==============================] - 0s 30us/step - loss: 0.2079 - accuracy: 0.9950
Epoch 6/10
200/200 [==============================] - 0s 30us/step - loss: 0.1826 - accuracy: 0.9950
Epoch 7/10
200/200 [==============================] - 0s 30us/step - loss: 0.1622 - accuracy: 0.9950
Epoch 8/10
200/200 [==============================] - 0s 30us/step - loss: 0.1462 - accuracy: 0.9950
Epoch 9/10
200/200 [==============================] - 0s 35us/step - loss: 0.1333 - accuracy: 0.9950
Epoch 10/10
200/200 [==============================] - 0s 35us/step - loss: 0.1226 - accuracy: 0.9950
Out[2]:
<keras.callbacks.callbacks.History at 0x22d8efdc948>

Implementation on a real-world dataset¶

In [4]:
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense

# --- Load the diabetes dataset -----------------------------------------
filename = 'diabetes.csv'
# header=0: the first CSV row supplies the DataFrame column names.
dataframe = pd.read_csv(filename, header=0)

# Split into features (first 8 columns) and the binary label (9th column).
array = dataframe.values
X = array[:, :8]
y = array[:, 8]

# --- Model: 8 inputs -> 12 (relu) -> 8 (relu) -> 1 (sigmoid) -----------
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# Single sigmoid output -> binary cross-entropy.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# verbose=0 suppresses the per-epoch progress bar during training.
model.fit(X, y, epochs=150, batch_size=10, verbose=0)
Out[4]:
<keras.callbacks.callbacks.History at 0x19c5fa3a348>
In [5]:
# evaluate the keras model
# Evaluate on the training data; evaluate() returns [loss, accuracy].
loss_value, accuracy = model.evaluate(X, y)
print('Accuracy: %.2f' % (accuracy*100))
768/768 [==============================] - 0s 72us/step
Accuracy: 76.30
In [6]:
# make class predictions with the model
# Make class predictions with the model.
# NOTE: Sequential.predict_classes was deprecated and removed in TF 2.6.
# For a single sigmoid output, the equivalent is to threshold the
# predicted probabilities at 0.5.
predictions = (model.predict(X) > 0.5).astype('int32').flatten()

# Summarize the first 5 cases: features => predicted class (expected label).
for i in range(5):
    print('%s => %d (expected %d)' % (X[i].tolist(), predictions[i], y[i]))
[6.0, 148.0, 72.0, 35.0, 0.0, 33.6, 0.627, 50.0] => 1 (expected 1)
[1.0, 85.0, 66.0, 29.0, 0.0, 26.6, 0.351, 31.0] => 0 (expected 0)
[8.0, 183.0, 64.0, 0.0, 0.0, 23.3, 0.672, 32.0] => 1 (expected 1)
[1.0, 89.0, 66.0, 23.0, 94.0, 28.1, 0.167, 21.0] => 0 (expected 0)
[0.0, 137.0, 40.0, 35.0, 168.0, 43.1, 2.288, 33.0] => 1 (expected 1)
In [ ]: